Camera Tracking help please
This year, my team has decided for the first time to actually use the camera instead of it just being a placeholder on the robot. The only problem is that I don't know how to program the camera to track the reflective targets on the field. So far, I have gotten the camera to take a picture and filter it down to the green box it is going to track. Now I need to get the robot to react and change its position accordingly. Any help is appreciated. A copy of my code is attached if anyone wants to check whether I already messed something up.
package edu.wpi.first.wpilibj.templates;

import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.image.BinaryImage;
import edu.wpi.first.wpilibj.image.ColorImage;
import edu.wpi.first.wpilibj.image.CriteriaCollection;
import edu.wpi.first.wpilibj.image.NIVision;
import edu.wpi.first.wpilibj.image.NIVisionException;
import edu.wpi.first.wpilibj.image.ParticleAnalysisReport;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the IterativeRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the
 * resource directory.
 */
public class Main extends IterativeRobot {

    AxisCamera camera = AxisCamera.getInstance();
    Joystick leftStick = new Joystick(1);
    Joystick rightStick = new Joystick(2);
    Joystick joypad = new Joystick(3);
    CriteriaCollection cc;
    RobotDrive tank = new RobotDrive(1, 2, 3, 4);

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code.
     */
    public void robotInit() {
        // Particle filter criteria: only keep particles whose area (in pixels)
        // falls inside these ranges.
        cc = new CriteriaCollection();
        cc.addCriteria(NIVision.MeasurementType.IMAQ_MT_AREA, 30, 400, false);
        cc.addCriteria(NIVision.MeasurementType.IMAQ_MT_AREA, 40, 400, false);
    }

    /**
     * This function is called periodically during autonomous.
     */
    public void autonomousPeriodic() {
    }

    /**
     * This function is called periodically during operator control.
     */
    public void teleopPeriodic() {
        tank.tankDrive(leftStick, rightStick);
        if (joypad.getRawButton(1)) {
            centerCalculate();
        }
    }

    /**
     * This function is called periodically during test mode.
     */
    public void testPeriodic() {
    }

    private void centerCalculate() {
        ColorImage image = null;
        BinaryImage thresholdImage = null;
        BinaryImage bigObjectsImage = null;
        BinaryImage convexHullImage = null;
        BinaryImage filteredImage = null;
        try {
            image = camera.getImage();
            // Keep only pixels in the green range of the retro-reflective target.
            thresholdImage = image.thresholdRGB(0, 45, 25, 225, 0, 45);
            // Remove small noise particles, fill the remaining shapes, then
            // apply the criteria set up in robotInit().
            bigObjectsImage = thresholdImage.removeSmallObjects(false, 2);
            convexHullImage = bigObjectsImage.convexHull(false);
            filteredImage = convexHullImage.particleFilter(cc);
            ParticleAnalysisReport[] reports = filteredImage.getOrderedParticleAnalysisReports();
            // Loop to reports.length - 1; the original "i < reports.length + 1"
            // ran one past the end of the array and would throw
            // ArrayIndexOutOfBoundsException.
            for (int i = 0; i < reports.length; i++) {
                System.out.println(reports[i].center_mass_x);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            // Free the NIVision images so the image buffers are not leaked.
            try {
                if (filteredImage != null) filteredImage.free();
                if (convexHullImage != null) convexHullImage.free();
                if (bigObjectsImage != null) bigObjectsImage.free();
                if (thresholdImage != null) thresholdImage.free();
                if (image != null) image.free();
            } catch (NIVisionException ex) {
                ex.printStackTrace();
            }
        }
    }
}
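For the "react and change position" step, what I have in mind (untested) is something like the method below, which would go inside the Main class above. It assumes the camera is set to 320x240, that getOrderedParticleAnalysisReports() returns the largest particle first, and that a simple proportional turn toward the target's horizontal offset is a reasonable starting point. The gain kTurn is a made-up number that would need tuning on the real robot.

    // Untested sketch: rotate toward the largest detected target.
    private void turnTowardTarget(ParticleAnalysisReport[] reports) {
        if (reports.length == 0) {
            tank.arcadeDrive(0.0, 0.0);   // no target in view, stop turning
            return;
        }
        final int imageWidth = 320;       // assumed camera resolution
        final double kTurn = 0.005;       // assumed proportional gain, needs tuning
        // Pixel offset of the target's center of mass from the image center.
        double error = reports[0].center_mass_x - (imageWidth / 2.0);
        // Clamp the turn command to the [-1, 1] range RobotDrive expects.
        double turn = Math.max(-1.0, Math.min(1.0, kTurn * error));
        tank.arcadeDrive(0.0, turn);      // no forward motion, just rotate in place
    }

The idea would be to call turnTowardTarget(reports) from centerCalculate() right after getting the reports, instead of only printing center_mass_x. Does that approach make sense, or is there a better way to turn the particle report into drive commands?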