USB Camera Image distorted

Hi all. I am getting a distorted image when I try to do image capture. I’m at my wits end with this, and it completely ruins any masking. You will note the band on the left side, which is actually from the right side of the proper image, and the top band which has a different brightness than the rest. Below that is the code for my subsystem.
Pardon the vast stretches of commented code, I started with the 2015 vision sample and have implemented/modified it gradually. Planning to clean it up once I get it fully working.
Also, LCameraServer is a modified version of the WPILib camera-server class from another post on here; I thought it might correct my issue. It made the stream less glitchy but didn't solve this problem.

Any suggestions?

/* Watery Tart subsystem for 2016 FRC Stronghold game
 * Team 5401 Fightin' Robotic Owls
 * FROGramming team
 * Based off 2015 Vision Retro Sample
 * You can't expect to wield supreme executive power just because some watery tart threw a sword at you.
 */

#include "WateryTart.h"
#include "../RobotMap.h"
#include "Commands/LockTarget.h"
//#include "SmartDashboard/SmartDashboard.h"
//#include "LiveWindow/LiveWindow.h"

//#include <math.h>
//#include <vector>

	//Structure to represent the scores for the various tests used for target identification
	struct Scores {
		double Area;
		double Aspect;
	double XFirstPixel, YFirstPixel, XUpLeftCorner, YUpLeftCorner, XDownRightCorner, YDownRightCorner, RectHeight, RectWidth, Aspect;

//	Image* frame;
//	Image* binaryFrame;
//	Image* TargetFrame;
	IMAQdxSession session;
	IMAQdxError imaqErrorEnum;

	int imaqError;
	double AREA_MINIMUM = 0.5; //Default Area minimum for particle as a percentage of total image area
	double LONG_RATIO = 2.22; //Tote long side = 26.9 / Tote height = 12.1 = 2.22
	double SHORT_RATIO = 1.4; //Tote short side = 16.9 / Tote height = 12.1 = 1.4
	double SCORE_MIN = 75.0;  //Minimum score to be considered a tote
	double VIEW_ANGLE = 60; //View angle fo camera, set to Axis m1011 by default, 64 for m1013, 51.7 for 206, 52 for HD3000 square, 60 for HD3000 640x480
	double ASPECT_GOAL	= 1;  //Aspect Ratio of the target in our targeting range
	ParticleFilterCriteria2 criteria[1];
	ParticleFilterOptions2 filterOptions = {0,0,1,1};
	Scores scores;

//Constructor: create the working images and open/configure the USB camera for on-demand grabs.
//NOTE(review): the base-class call was cut off by the paste ("WateryTart::WateryTart() :");
//reconstructed as the conventional Subsystem name call - confirm against your original file.
WateryTart::WateryTart() : Subsystem("WateryTart") {
//Motor and sensor declarations here
	frame 		= imaqCreateImage(IMAQ_IMAGE_RGB, 0);
	binaryFrame = imaqCreateImage(IMAQ_IMAGE_U8, 0);
	TargetFrame = imaqCreateImage(IMAQ_IMAGE_U8, 0);

	imaqError = IMAQdxOpenCamera("cam0", IMAQdxCameraControlModeController, &session);
	if(imaqError != IMAQdxErrorSuccess) { //was testing imaqErrorEnum, which is never assigned
		DriverStation::ReportError("IMAQdxOpenCamera error: " + std::to_string((long)imaqError) + "\n");
	}
	//NOTE(review): a wrap-around band and brightness seam in the grabbed image is the classic
	//symptom of a mismatch between the camera's negotiated video mode and the buffer layout the
	//driver expects; consider pinning an explicit video mode/resolution via the IMAQdx attribute
	//API before calling IMAQdxConfigureGrab - verify with IMAQdxEnumerateVideoModes.
	imaqError = IMAQdxConfigureGrab(session);
	if(imaqError != IMAQdxErrorSuccess) { //was testing imaqErrorEnum, which is never assigned
		DriverStation::ReportError("IMAQdxConfigureGrab error: " + std::to_string((long)imaqError) + "\n");
	}
}

//Subsystem hook: no default command is active yet; LockTarget is wired up but left disabled.
void WateryTart::InitDefaultCommand() {
	//SetDefaultCommand(new LockTarget());
}

/* INTENT: Search function
 * Upon the press of a button, a command will invoke this function to check the image for the scoring U, and evaluate whether we have a shooting vector
 * It will return a rumble to the controller and splash a green box on the dashboard
 * Ideally, this will take place on an on board raspberry pi or arduino board, but that is version 2.0
void WateryTart::Search(Range Hue, Range Sat, Range Val)
double WaitTime = 3;
int Particle_No = 0;

    // create images
//	frame = imaqCreateImage(IMAQ_IMAGE_RGB, 0);
//	binaryFrame = imaqCreateImage(IMAQ_IMAGE_U8, 0);
//	TargetFrame = imaqCreateImage(IMAQ_IMAGE_U8,0);
	//Put default values to SmartDashboard so fields will appear
	SmartDashboard::PutNumber("Tote hue min", Hue.minValue);
	SmartDashboard::PutNumber("Tote hue max", Hue.maxValue);
	SmartDashboard::PutNumber("Tote sat min", Sat.minValue);
	SmartDashboard::PutNumber("Tote sat max", Sat.maxValue);
	SmartDashboard::PutNumber("Tote val min", Val.minValue);
	SmartDashboard::PutNumber("Tote val max", Val.maxValue);
	SmartDashboard::PutNumber("Area min %", AREA_MINIMUM);
	SmartDashboard::PutNumber("Cycle Time", WaitTime);

	//read file in from disk. For this example to run you need to copy image.jpg from the SampleImages folder to the
	//directory shown below using FTP or SFTP:
	//Two different pictures here, just referring to one or the other based on commented line, leave commented and uncomment section below to use camera
//	imaqError = imaqReadFile(frame, "//home//lvuser//SampleImages//Goalimage20.png", NULL, NULL);
// This starts acquisition from the camera, uncomment once calibrated with the files above.

	IMAQdxGrab(session, frame, true, NULL); //Takes the image from "session" and stores it in "frame"
	if(imaqErrorEnum != IMAQdxErrorSuccess) {
		SmartDashboard::PutNumber("Error Code", imaqError);
		DriverStation::ReportError("IMAQdxGrab error: " + std::to_string((long)imaqError) + "

	//Threshold the image looking for ring light color

	LCameraServer::GetInstance()->SetImage(frame);  //Send original image to dashboard to assist in tweaking mask.
//	Wait(WaitTime); //Part of test code to cycle between the filtered image and the color image
	imaqError = imaqColorThreshold(binaryFrame, frame, 255, IMAQ_RGB, &Hue, &Sat, &Val);

	//Send particle count to dashboard
	int numParticles = 0;
	imaqError = imaqCountParticles(binaryFrame, 1, &numParticles);
	SmartDashboard::PutNumber("Masked particles", numParticles);

	//Replaces the SendtoDashboard function without error handling
//	LCameraServer::GetInstance()->SetImage(binaryFrame); //Send masked image to dashboard to assist in tweaking mask.

	//filter out small particles
	float areaMin = SmartDashboard::GetNumber("Area min %", AREA_MINIMUM);
	criteria[0] = {IMAQ_MT_AREA_BY_IMAGE_AREA, areaMin, 100, false, false};
	imaqError = imaqParticleFilter4(binaryFrame, binaryFrame, criteria, 1, &filterOptions, NULL, NULL);

	if(numParticles > 0) {
		//Measure particles and sort by particle size  //Here's the thing, ParticleReport is a defined thing in imaq
		std::vector<ParticleReport> particles;
		for(int particleIndex = 0; particleIndex < numParticles; particleIndex++)
			ParticleReport par;
			imaqMeasureParticle(binaryFrame, particleIndex, 0, IMAQ_MT_AREA_BY_IMAGE_AREA, &(par.PercentAreaToImageArea));
			imaqMeasureParticle(binaryFrame, particleIndex, 0, IMAQ_MT_AREA, &(par.Area));
			imaqMeasureParticle(binaryFrame, particleIndex, 0, IMAQ_MT_BOUNDING_RECT_TOP, &(par.BoundingRectTop));
			imaqMeasureParticle(binaryFrame, particleIndex, 0, IMAQ_MT_BOUNDING_RECT_LEFT, &(par.BoundingRectLeft));
			imaqMeasureParticle(binaryFrame, particleIndex, 0, IMAQ_MT_BOUNDING_RECT_BOTTOM, &(par.BoundingRectBottom));
			imaqMeasureParticle(binaryFrame, particleIndex, 0, IMAQ_MT_BOUNDING_RECT_RIGHT, &(par.BoundingRectRight));
		sort(particles.begin(), particles.end(), CompareParticleSizes);
		//This example only scores the largest particle. Extending to score all particles and choosing the desired one is left as an exercise
		//for the reader. Note that this scores and reports information about a single particle (single L shaped target). To get accurate information
		//about the location of the tote (not just the distance) you will need to correlate two adjacent targets in order to find the true center of the tote.
		scores.Aspect = AspectScore(;
		SmartDashboard::PutNumber("Aspect", scores.Aspect);
		scores.Area = AreaScore(;
		SmartDashboard::PutNumber("Area", scores.Area);
		bool isTarget = scores.Area > SCORE_MIN && scores.Aspect > SCORE_MIN;

	//Send particle count after filtering to dashboard
		imaqError = imaqCountParticles(binaryFrame, 1, &numParticles);
		SmartDashboard::PutNumber("Filtered particles", numParticles);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_FIRST_PIXEL_X, &XFirstPixel);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_FIRST_PIXEL_Y, &YFirstPixel);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_BOUNDING_RECT_LEFT, &XUpLeftCorner);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_BOUNDING_RECT_TOP, &YUpLeftCorner);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_BOUNDING_RECT_RIGHT, &XDownRightCorner);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_BOUNDING_RECT_BOTTOM, &YDownRightCorner);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_BOUNDING_RECT_WIDTH, &RectHeight);
		imaqMeasureParticle(binaryFrame, Particle_No, false, IMAQ_MT_BOUNDING_RECT_HEIGHT, &RectWidth);
		SmartDashboard::PutNumber("First Pixel - X", XFirstPixel);
		SmartDashboard::PutNumber("First Pixel - Y", YFirstPixel);
		SmartDashboard::PutNumber("LeftRectTop-X", XUpLeftCorner);
		SmartDashboard::PutNumber("LeftRectTop-Y", YUpLeftCorner);
		SmartDashboard::PutNumber("RightRectDown-X", XDownRightCorner);
		SmartDashboard::PutNumber("RightRectDown - Y", YDownRightCorner);
		SmartDashboard::PutNumber("Rectangle Height", RectHeight);
		SmartDashboard::PutNumber("Rectangle Width", RectWidth);
		SmartDashboard::PutNumber("Aspect Ratio", Aspect);
		Aspect = (RectWidth / RectHeight);
		scores.Aspect = (Aspect / ASPECT_GOAL);

//		SmartDashboard::PutBoolean("IsTarget", isTarget);
//		double WateryTart::computeDistance (Image *image, ParticleReport report) {

//		Wait(WaitTime);
		imaqError = imaqDrawShapeOnImage(TargetFrame, binaryFrame, {YUpLeftCorner, XUpLeftCorner, RectWidth, RectHeight}, DrawMode::IMAQ_DRAW_INVERT, ShapeMode::IMAQ_SHAPE_RECT, 0.0f);
//		LCameraServer::GetInstance()->SetImage(TargetFrame); //Send masked image to dashboard to assist in tweaking mask.

		double normalizedWidth, targetWidth;
		int xRes, yRes;

		imaqGetImageSize(binaryFrame, &xRes, &yRes);
		normalizedWidth = 2*(XDownRightCorner - XUpLeftCorner)/xRes;
		SmartDashboard::PutNumber("Width", normalizedWidth);
		targetWidth = 7;

		double distance =  targetWidth/(normalizedWidth*12*tan(VIEW_ANGLE*M_PI/(180*2)));
		SmartDashboard::PutNumber("Distance", distance);
//	} else {
//		SmartDashboard::PutBoolean("IsTarget", false);
//	}

/* INTENT: Manual aiming
 * While a button is pressed, this will show a display with a crosshair that will allow manual aiming
 */
void WateryTart::Manual() {
	//TODO: not yet implemented
}

/* INTENT: Stop
 * Not sure this will be needed but reserving a space for it so that we can clear image, or reinitialize variables, or anything associated with stopping
 */
void WateryTart::Stop() {
	//TODO: not yet implemented
}

/* INTENT: Reset
 * Not sure what this might do or what might trigger it, but want to see if there's any kind of clear image cache or something
 */
void WateryTart::Reset() {
	//TODO: not yet implemented
}

	//Comparator function for sorting particles. Returns true if particle 1 is larger.
	//NOTE(review): "static" removed from the definition - the member is declared static in the
	//header, and C++ forbids repeating the keyword on an out-of-class member definition.
	bool WateryTart::CompareParticleSizes(ParticleReport particle1, ParticleReport particle2) {
		//we want descending sort order
		return particle1.PercentAreaToImageArea > particle2.PercentAreaToImageArea;
	}
	 * Converts a ratio with ideal value of 1 to a score. The resulting function is piecewise
	 * linear going from (0,0) to (1,100) to (2,0) and is 0 for all inputs outside the range 0-2
	double WateryTart::ratioToScore(double ratio)
		return (fmax(0, fmin(100*(1-fabs(1-ratio)), 100)));
	/* Scores how well the particle's filled area matches the expected coverage of the
	 * retro-reflective target within its bounding rectangle (0-100, 100 = perfect match).
	 */
	double WateryTart::AreaScore(ParticleReport report) {
		double boundingArea = (report.BoundingRectBottom - report.BoundingRectTop) * (report.BoundingRectRight - report.BoundingRectLeft);
		//Tape is 7" edge so 49" bounding rect. With 2" wide tape it covers 24" of the rect.
		//Fix: 49/24 is integer division and truncated to 2, doubling every area ratio;
		//49.0/24.0 keeps the intended factor of ~2.042.
		return ratioToScore((49.0/24.0)*report.Area/boundingArea);
	}
	 * Method to score if the aspect ratio of the particle appears to match the retro-reflective target. Target is 7"x7" so aspect should be 1
	double WateryTart::AspectScore(ParticleReport report)
		return ratioToScore(((report.BoundingRectRight-report.BoundingRectLeft)/(report.BoundingRectBottom-report.BoundingRectTop)));
	 * Computes the estimated distance to a target using the width of the particle in the image. For more information and graphics
	 * showing the math behind this approach see the Vision Processing section of the ScreenStepsLive documentation.
	 * @param image The image to use for measuring the particle estimated rectangle
	 * @param report The Particle Analysis Report for the particle
	 * @return The estimated distance to the target in feet.
	double WateryTart::computeDistance (Image *image, ParticleReport report) {
		double normalizedWidth, targetWidth;
		int xRes, yRes;
		imaqGetImageSize(image, &xRes, &yRes);
		normalizedWidth = 2*(report.BoundingRectRight - report.BoundingRectLeft)/xRes;
		SmartDashboard::PutNumber("Width", normalizedWidth);
		targetWidth = 7;
		return  targetWidth/(normalizedWidth*12*tan(VIEW_ANGLE*M_PI/(180*2)));

And my h file:

/* Targeting subsystem header for 2016 FRC Stronghold game
 * Team 5401 Fightin' Robotic Owls
 * FROGramming team
 * Come and see the violence inherent in the system. Help! Help! I'm being repressed!
 */

#include "Commands/Subsystem.h"
#include "WPILib.h"
#include "Libraries/LCameraServer.h"

class WateryTart: public Subsystem
	// It's desirable that everything possible under private except
	// for methods that implement subsystem capabilities
//	USBCamera *MainCam;
	Image* frame;
	Image* binaryFrame;
	Image* TargetFrame;
	IMAQdxSession session;

	//Declares the parts of the robot necessary for this subsystem

	void InitDefaultCommand();
	void Search(Range, Range, Range);
	void Manual();
	void Stop();
	void Reset();
	void SendToDashboard(Image*, int);
	static bool CompareParticleSizes(ParticleReport, ParticleReport);
	double ratioToScore(double);
	double AreaScore(ParticleReport);
	double AspectScore(ParticleReport);
	double computeDistance (Image, ParticleReport);