Multiple VisionThreads from different camera sources on Raspberry Pi

I was able to run one VisionThread from a single camera source on the Raspberry Pi successfully, but I ran into trouble when I tried running three cameras, each with its own VisionThread.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import edu.wpi.cscore.CvSink;
import edu.wpi.cscore.CvSource;
import edu.wpi.cscore.MjpegServer;
import edu.wpi.cscore.UsbCamera;
import edu.wpi.cscore.VideoSource;
import edu.wpi.first.cameraserver.CameraServer;
import edu.wpi.first.networktables.NetworkTableInstance;
import edu.wpi.first.vision.VisionPipeline;
import edu.wpi.first.vision.VisionThread;
import edu.wpi.first.networktables.NetworkTable;
import edu.wpi.first.networktables.NetworkTableEntry;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.imgproc.Imgproc;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;


/*
   JSON format:
   {
       "team": <team number>,
       "ntmode": <"client" or "server", "client" if unspecified>
       "cameras": [
           {
               "name": <camera name>
               "path": <path, e.g. "/dev/video0">
               "pixel format": <"MJPEG", "YUYV", etc>   // optional
               "width": <video mode width>              // optional
               "height": <video mode height>            // optional
               "fps": <video mode fps>                  // optional
               "brightness": <percentage brightness>    // optional
               "white balance": <"auto", "hold", value> // optional
               "exposure": <"auto", "hold", value>      // optional
               "properties": [                          // optional
                   {
                       "name": <property name>
                       "value": <property value>
                   }
               ],
               "stream": {                              // optional
                   "properties": [
                       {
                           "name": <stream property name>
                           "value": <stream property value>
                       }
                   ]
               }
           }
       ]
   }
 */
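
/*
   For example, a minimal /boot/frc.json matching the three cameras opened
   below might look like this (the team number is made up for illustration):
   {
       "team": 1234,
       "ntmode": "client",
       "cameras": [
           {"name": "Right Camera", "path": "/dev/video0"},
           {"name": "Left Camera", "path": "/dev/video1"},
           {"name": "Middle Camera", "path": "/dev/video2"}
       ]
   }
 */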

public final class Main {
  private static String configFile = "/boot/frc.json";

  @SuppressWarnings("MemberName")
  public static class CameraConfig {
    public String name;
    public String path;
    public JsonObject config;
    public JsonElement streamConfig;
  }

  public static int team;
  public static boolean server;
  public static List<CameraConfig> cameraConfigs = new ArrayList<>();

  private Main() {
  }

  /**
   * Report parse error.
   */
  public static void parseError(String str) {
    System.err.println("config error in '" + configFile + "': " + str);
  }

  /**
   * Read single camera configuration.
   */
  public static boolean readCameraConfig(JsonObject config) {
    CameraConfig cam = new CameraConfig();

    // name
    JsonElement nameElement = config.get("name");
    if (nameElement == null) {
      parseError("could not read camera name");
      return false;
    }
    cam.name = nameElement.getAsString();
    System.out.println("Camera name is " + cam.name);

    // path
    JsonElement pathElement = config.get("path");
    if (pathElement == null) {
      parseError("camera '" + cam.name + "': could not read path");
      return false;
    }
    cam.path = pathElement.getAsString();

    // stream properties
    cam.streamConfig = config.get("stream");
    
    cam.config = config;

    cameraConfigs.add(cam);
    return true;
  }

  /**
   * Read configuration file.
   */
  @SuppressWarnings("PMD.CyclomaticComplexity")
  public static boolean readConfig() {
    // parse file
    JsonElement top;
    try {
      top = new JsonParser().parse(Files.newBufferedReader(Paths.get(configFile)));
    } catch (IOException ex) {
      System.err.println("could not open '" + configFile + "': " + ex);
      return false;
    }

    // top level must be an object
    if (!top.isJsonObject()) {
      parseError("must be JSON object");
      return false;
    }
    JsonObject obj = top.getAsJsonObject();

    // team number
    JsonElement teamElement = obj.get("team");
    if (teamElement == null) {
      parseError("could not read team number");
      return false;
    }
    team = teamElement.getAsInt();

    // ntmode (optional)
    if (obj.has("ntmode")) {
      String str = obj.get("ntmode").getAsString();
      if ("client".equalsIgnoreCase(str)) {
        server = false;
      } else if ("server".equalsIgnoreCase(str)) {
        server = true;
      } else {
        parseError("could not understand ntmode value '" + str + "'");
      }
    }

    // cameras
    JsonElement camerasElement = obj.get("cameras");
    if (camerasElement == null) {
      parseError("could not read cameras");
      return false;
    }
    JsonArray cameras = camerasElement.getAsJsonArray();
    for (JsonElement camera : cameras) {
      if (!readCameraConfig(camera.getAsJsonObject())) {
        return false;
      }
    }

    return true;
  }


  static boolean middle = false;
  static char direction = 'r';

  /**
   * Main.
   */
  public static void main(String... args) {

    if (args.length > 0) {
      configFile = args[0];
    }

    // read configuration
    if (!readConfig()) {
      return;
    }

    // start NetworkTables
    NetworkTableInstance ntinst = NetworkTableInstance.getDefault();
    if (server) {
      System.out.println("Setting up NetworkTables server");
      ntinst.startServer();
    } else {
      System.out.println("Setting up NetworkTables client for team " + team);
      ntinst.startClientTeam(team);
    }
    
   
    CameraServer leftInst;
    CameraServer middleInst;
    CameraServer rightInst;

    rightInst = CameraServer.getInstance();
    UsbCamera rightCamera = new UsbCamera("Right Camera", "/dev/video0");
    MjpegServer rightCameraServer = rightInst.startAutomaticCapture(rightCamera);
    rightCamera.setConnectionStrategy(VideoSource.ConnectionStrategy.kKeepOpen);
  
    leftInst = CameraServer.getInstance();
    UsbCamera leftCamera = new UsbCamera("Left Camera","/dev/video1");
    MjpegServer leftCameraServer = leftInst.startAutomaticCapture(leftCamera);
    leftCamera.setConnectionStrategy(VideoSource.ConnectionStrategy.kKeepOpen);
    

    middleInst = CameraServer.getInstance();
    UsbCamera middleCamera = new UsbCamera("Middle Camera", "/dev/video2");
    MjpegServer middleCameraServer = middleInst.startAutomaticCapture(middleCamera);
    middleCamera.setConnectionStrategy(VideoSource.ConnectionStrategy.kKeepOpen);
  
    

    Mat leftCameraMat = new Mat();
    Mat middleCameraMat = new Mat();
    Mat rightCameraMat = new Mat();

    CvSink cvSinkLeft = leftInst.getVideo();
    CvSource leftOutput = leftInst.putVideo("Left Camera Stream", 160, 120);

    CvSink cvSinkMiddle = middleInst.getVideo();
    CvSource middleOutput = middleInst.putVideo("Middle Camera Stream", 160, 120);

    CvSink cvSinkRight = rightInst.getVideo();
    CvSource rightOutput = rightInst.putVideo("Right Camera Stream", 160, 120);


    
    VisionThread leftCameraThread = new VisionThread(leftCamera,
        new GripPipeline(), pipeline -> {
          // Grab a frame from the left sink, flip it around both axes,
          // and draw a test rectangle before streaming it back out.
          cvSinkLeft.grabFrame(leftCameraMat);
          Mat tmpMat = leftCameraMat.t();
          Core.flip(leftCameraMat, tmpMat, -1);
          Imgproc.rectangle(tmpMat, new Point(0, 0), new Point(40, 40), new Scalar(255, 255, 255));

          System.out.println("No contours found; Left Camera");
          leftOutput.putFrame(tmpMat);
        });

    VisionThread middleCameraThread = new VisionThread(middleCamera,
        new GripPipeline(), pipeline -> {
          cvSinkMiddle.grabFrame(middleCameraMat);
          Mat tmpMat = middleCameraMat.t();
          Core.flip(middleCameraMat, tmpMat, -1);
          Imgproc.rectangle(tmpMat, new Point(0, 0), new Point(40, 40), new Scalar(255, 255, 255));

          System.out.println("No contours found; Middle Camera");
          middleOutput.putFrame(tmpMat);
        });
     
    VisionThread rightCameraThread = new VisionThread(rightCamera,
        new GripPipeline(), pipeline -> {
          cvSinkRight.grabFrame(rightCameraMat);
          Mat tmpMat = rightCameraMat.t();
          Core.flip(rightCameraMat, tmpMat, -1);
          Imgproc.rectangle(tmpMat, new Point(0, 0), new Point(40, 40), new Scalar(255, 255, 255));

          System.out.println("No contours found; Right Camera");
          rightOutput.putFrame(tmpMat);
        });

    leftCameraThread.start();
    //middleCameraThread.start();
    rightCameraThread.start();

    // loop forever
    for (;;) {
      try {
        Thread.sleep(10000);
      } catch (InterruptedException ex) {
        return;
      }
    }
  }
}

The issue was that the image processing for the left stream used the image from the right camera rather than the left camera, and the same thing happened with the middle camera.
Does anyone know how to run three cameras, each with its own VisionThread?

Disclaimer: I've never used Java for FRC, only C++.

You should use getVideo(VideoSource camera), not getVideo(). The no-argument overload returns the server's primary camera feed, so all three of your sinks end up grabbing frames from the same source.
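
In your code above, that would look something like this (an untested sketch, going off the CameraServer Java docs; note that CameraServer.getInstance() returns a singleton, so leftInst, middleInst, and rightInst all refer to the same server):

// Bind each CvSink to a specific camera instead of the primary feed.
CvSink cvSinkLeft = leftInst.getVideo(leftCamera);
CvSink cvSinkMiddle = middleInst.getVideo(middleCamera);
CvSink cvSinkRight = rightInst.getVideo(rightCamera);

With that change, each VisionThread lambda grabs frames from a sink tied to its own camera.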

Thank you for this suggestion, I did not realize that a video source could be passed as an argument. I will try this out as soon as I can.
